In [1]:
import tensorflow as tf
import numpy as np
import pandas as pd
In [17]:
# Example inputs: unnormalized scores (logits) for 3 classes, and the
# integer index of the true class (class 0).
logit_list = [0.1, 0.5, 0.4]
label_list = [0]
In [18]:
# Wrap the example data in TF variables. Note the extra list around
# logit_list: logits gets shape (1, 3) and labels shape (1,) -- a batch
# containing a single example.
logits = tf.Variable(np.array([logit_list]), dtype=tf.float32, name="logits")
labels = tf.Variable(np.array(label_list), dtype=tf.int32, name="labels")
In [19]:
# Reference computation: the "sparse" variant takes integer class indices
# directly (no one-hot encoding of the labels).
result = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels, logits=logits, name='result')
In [20]:
# Op that initializes all graph variables (TF1-style graph API).
init = tf.global_variables_initializer()
In [21]:
# Run the graph: initialize the variables, evaluate the cross-entropy op,
# and print the TF reference value (one loss per batch example).
# NOTE: the notebook export had lost the indentation of the `with` body;
# restored here so the cell is valid Python again.
with tf.Session() as sess:
    init.run()
    ret = sess.run(result)
    print(ret)
In [22]:
# Next, I am trying to implement softmax cross-entropy myself
# reference: https://deepnotes.io/softmax-crossentropy
# reference: https://ml-cheatsheet.readthedocs.io/en/latest/loss_functions.html#cross-entropy
In [23]:
def softmax(logits):
    """Numerically stable softmax over a 1-D sequence of logits.

    Subtracting the maximum logit before exponentiating prevents overflow
    for large inputs; it does not change the result, since softmax is
    invariant to adding a constant to every logit.
    (Also restores the body indentation lost in the notebook export.)

    Parameters
    ----------
    logits : array-like of float
        Unnormalized class scores.

    Returns
    -------
    np.ndarray
        Probabilities with the same length as `logits`, summing to 1.
    """
    shifted = np.asarray(logits, dtype=np.float64) - np.max(logits)
    exps = np.exp(shifted)
    return exps / np.sum(exps)
In [24]:
# Sanity check: class probabilities for the example logits (sum to 1).
print(softmax(logit_list))
In [13]:
def cross_entropy(label, y_hat):
    """Cross-entropy loss for a single example with an integer label.

    (Restores the body indentation lost in the notebook export.)

    Parameters
    ----------
    label : int
        Index of the true class.
    y_hat : array-like
        Predicted class probabilities (e.g. softmax output).

    Returns
    -------
    Negative log-probability assigned to the true class.
    """
    return -np.log(y_hat[label])
In [25]:
# Predicted class probabilities from my softmax implementation.
y_hat = np.array(softmax(logit_list))
In [26]:
# Should match the value printed by the TensorFlow cell above.
print(cross_entropy(label_list[0], y_hat=y_hat))
We can see that my implementation matches the TensorFlow result.